Scikit-Plot Repo

Examples adapted from the scikit-plot repository. They use scikit-plot's Factory API (classifier_factory, clustering_factory) and the scikitplot.plotters module, which newer scikit-plot releases deprecate in favor of plain plotting functions, so an older release may be required to run them unchanged.

pip install scikit-plot


In [ ]:
%matplotlib inline
import matplotlib.pyplot as plt
plt.rcParams['figure.figsize'] = (14, 14)

In [ ]:
from sklearn.datasets import load_digits as load_data
from sklearn.naive_bayes import GaussianNB

In [ ]:
# This is all that's needed for scikit-plot
import matplotlib.pyplot as plt
from scikitplot import classifier_factory

In [ ]:
# Load data
X, y = load_data(return_X_y=True)

In [ ]:
# Create a regular GaussianNB instance
nb = GaussianNB()

In [ ]:
# Modify the scikit-learn instance in place so it gains scikit-plot's plotting methods
classifier_factory(nb)

In [ ]:
# The modified estimator now exposes scikit-plot methods such as plot_roc_curve
nb.plot_roc_curve(X, y, random_state=1)

In [ ]:
# Display plot
plt.show()

In [ ]:


In [ ]:
from sklearn.ensemble import RandomForestClassifier

In [ ]:
random_forest_clf = RandomForestClassifier(n_estimators=5, max_depth=5, random_state=1)

In [ ]:
from scikitplot import classifier_factory

In [ ]:
classifier_factory(random_forest_clf)

In [ ]:
random_forest_clf.plot_confusion_matrix(X, y, normalize=True)

In [ ]:
plt.show()

In [ ]:


In [ ]:
from scikitplot import plotters as skplt

In [ ]:
rf = RandomForestClassifier()

In [ ]:
rf = rf.fit(X, y)

In [ ]:
preds = rf.predict(X)

In [ ]:
skplt.plot_confusion_matrix(y_true=y, y_pred=preds)
plt.show()
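
The factory call above produced a normalized matrix; the plotters function accepts a similar normalize flag (an assumption about the functions-based API, so verify it against your installed version):

In [ ]:
# Assumed: the plotters function also takes a normalize keyword
skplt.plot_confusion_matrix(y_true=y, y_pred=preds, normalize=True)
plt.show()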

In [ ]:


In [ ]:


In [ ]:
from __future__ import absolute_import
import matplotlib.pyplot as plt
from scikitplot import clustering_factory
from sklearn.cluster import KMeans
from sklearn.datasets import load_iris as load_data

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
kmeans = clustering_factory(KMeans(random_state=1))

In [ ]:
kmeans.plot_elbow_curve(X, cluster_ranges=range(1, 11))
plt.show()
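
The elbow plot should also be available through the functions-based API (a sketch assuming scikitplot.plotters exposes plot_elbow_curve(clf, X, cluster_ranges=...), mirroring the plotters examples above; check your installed version):

In [ ]:
from scikitplot import plotters as skplt
from sklearn.cluster import KMeans

# Assumed plotters signature: the function refits KMeans for each cluster count itself
skplt.plot_elbow_curve(KMeans(random_state=1), X, cluster_ranges=range(1, 11))
plt.show()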

In [ ]:


In [ ]:


In [ ]:
from sklearn.ensemble import RandomForestClassifier
from sklearn.datasets import load_iris as load_data
import matplotlib.pyplot as plt
from scikitplot import classifier_factory

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
rf = classifier_factory(RandomForestClassifier(random_state=1))

In [ ]:
rf.fit(X, y)

In [ ]:
rf.plot_feature_importances(feature_names=['sepal length', 'sepal width',
                                           'petal length', 'petal width'])
plt.show()
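
For comparison, a functions-based version of the same plot (a sketch assuming scikitplot.plotters exposes plot_feature_importances for a fitted estimator; verify against your installed version):

In [ ]:
from scikitplot import plotters as skplt

# Fit a plain (unmodified) estimator and pass it to the assumed plotters function
rf_plain = RandomForestClassifier(random_state=1).fit(X, y)
skplt.plot_feature_importances(rf_plain, feature_names=['sepal length', 'sepal width',
                                                        'petal length', 'petal width'])
plt.show()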

In [ ]:


In [ ]:


In [ ]:
from __future__ import absolute_import
import matplotlib.pyplot as plt
from scikitplot import classifier_factory
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import load_breast_cancer as load_data

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
lr = classifier_factory(LogisticRegression())

In [ ]:
lr.plot_ks_statistic(X, y, random_state=1)
plt.show()

In [ ]:


In [ ]:
from scikitplot import plotters as skplt

In [ ]:
lr = LogisticRegression()

In [ ]:
lr = lr.fit(X, y)

In [ ]:
probas = lr.predict_proba(X)

In [ ]:
skplt.plot_ks_statistic(y_true=y, y_probas=probas)
plt.show()

In [ ]:


In [ ]:


In [ ]:
from __future__ import absolute_import
import matplotlib.pyplot as plt
from scikitplot import classifier_factory
from sklearn.ensemble import RandomForestClassifier
from sklearn.datasets import load_breast_cancer as load_data

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
rf = classifier_factory(RandomForestClassifier())

In [ ]:
rf.plot_learning_curve(X, y)
plt.show()

In [ ]:


In [ ]:
from scikitplot import plotters as skplt

In [ ]:
rf = RandomForestClassifier()

In [ ]:
skplt.plot_learning_curve(rf, X, y)
plt.show()

In [ ]:


In [ ]:


In [ ]:
from sklearn.decomposition import PCA
from sklearn.datasets import load_digits as load_data
import scikitplot.plotters as skplt
import matplotlib.pyplot as plt

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
pca = PCA(random_state=1)

In [ ]:
pca.fit(X)

In [ ]:
skplt.plot_pca_2d_projection(pca, X, y)
plt.show()

In [ ]:


In [ ]:


In [ ]:
from sklearn.decomposition import PCA
from sklearn.datasets import load_digits as load_data
import scikitplot.plotters as skplt
import matplotlib.pyplot as plt

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
pca = PCA(random_state=1)

In [ ]:
pca.fit(X)

In [ ]:
skplt.plot_pca_component_variance(pca)
plt.show()

In [ ]:


In [ ]:


In [ ]:
from __future__ import absolute_import
import matplotlib.pyplot as plt
from scikitplot import classifier_factory
from sklearn.naive_bayes import GaussianNB
from sklearn.datasets import load_digits as load_data

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
nb = classifier_factory(GaussianNB())

In [ ]:
nb.plot_precision_recall_curve(X, y, random_state=1)
plt.show()
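
As with the ROC example that follows, a functions-based variant should also work (a sketch assuming scikitplot.plotters exposes plot_precision_recall_curve taking y_true and y_probas; check your installed version):

In [ ]:
from scikitplot import plotters as skplt

# Fit a plain GaussianNB and plot from its predicted probabilities (assumed plotters signature)
nb_plain = GaussianNB().fit(X, y)
probas = nb_plain.predict_proba(X)
skplt.plot_precision_recall_curve(y_true=y, y_probas=probas)
plt.show()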

In [ ]:


In [ ]:


In [ ]:
from __future__ import absolute_import
import matplotlib.pyplot as plt
from scikitplot import classifier_factory
from sklearn.naive_bayes import GaussianNB
from sklearn.datasets import load_digits as load_data

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
nb = classifier_factory(GaussianNB())

In [ ]:
nb.plot_roc_curve(X, y, random_state=1)
plt.show()

In [ ]:


In [ ]:
from scikitplot import plotters as skplt

In [ ]:
nb = GaussianNB()

In [ ]:
nb = nb.fit(X, y)

In [ ]:
probas = nb.predict_proba(X)

In [ ]:
skplt.plot_roc_curve(y_true=y, y_probas=probas)
plt.show()

In [ ]:


In [ ]:


In [ ]:
from __future__ import absolute_import
import matplotlib.pyplot as plt
from scikitplot import clustering_factory
from sklearn.cluster import KMeans
from sklearn.datasets import load_iris as load_data

In [ ]:
X, y = load_data(return_X_y=True)

In [ ]:
kmeans = clustering_factory(KMeans(n_clusters=4, random_state=1))

In [ ]:
kmeans.plot_silhouette(X)
plt.show()
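
A functions-based variant of the silhouette plot (a sketch assuming scikitplot.plotters exposes plot_silhouette(clf, X) and fits the clusterer itself; check your installed version):

In [ ]:
from scikitplot import plotters as skplt

# Assumed plotters signature: pass an unfitted clusterer together with the data
skplt.plot_silhouette(KMeans(n_clusters=4, random_state=1), X)
plt.show()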
